In [ ]:
import os
import tensorflow as tf
import keras.backend.tensorflow_backend as KTF

def get_session(gpu_fraction=0.3):
    '''Assume that you have 6GB of GPU memory and want to allocate ~2GB'''

    num_threads = os.environ.get('OMP_NUM_THREADS')
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_fraction)

    if num_threads:
        # OMP_NUM_THREADS is read from the environment as a string, so cast it before handing it to TensorFlow
        return tf.Session(config=tf.ConfigProto(
            gpu_options=gpu_options, intra_op_parallelism_threads=int(num_threads)))
    else:
        return tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))

# register the session with Keras so the GPU memory limit actually applies to Keras models
KTF.set_session(get_session())

In [67]:
import argparse
import collections
import datetime
import enum
import glob
import logging
import numpy as np
import os
import random
import sys
import tempfile
import tensorflow as tf

from keras.applications.imagenet_utils import preprocess_input
from keras.applications.resnet50 import ResNet50
from keras.callbacks import ModelCheckpoint
from keras.models import Sequential                              
from keras.layers import Dense, Dropout, Flatten, Input, Reshape            
from keras.preprocessing import image

import pelops.const as const
from pelops.datasets.dgcars import DGCarsDataset
from pelops.utils import SetType, setup_custom_logger

In [68]:
train_dir_path = "./datasets/train/" 
val_dir_path = "./datasets/test/"
train_features_path = None
val_features_path = None
dataset_type = "DGCarsDataset"
conv_model_type = "ResNet50"
conv_model_name = "ResNet50"

nb_epoch = 10
dropout_rate = 0.5
batch_size = 32
seed = 11
img_height = 224
img_width = 224
img_dimension = 3
index_accuracy = 1

np.random.seed(seed)
random.seed(seed)
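
numpy and Python's random module are seeded above; TensorFlow keeps its own graph-level seed. A minimal sketch of also seeding it, assuming the TF 0.x/1.x API used elsewhere in this notebook:

In [ ]:
# sketch: seed TensorFlow's graph-level RNG as well, for more reproducible runs
tf.set_random_seed(seed)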

1. Extract features


In [69]:
train_datagen = image.ImageDataGenerator()
val_datagen = image.ImageDataGenerator()

train_generator = train_datagen.flow_from_directory(
    directory=train_dir_path,
    target_size=(img_height, img_width),
    seed=seed, 
    follow_links=True
)
val_generator = val_datagen.flow_from_directory(
    directory=val_dir_path,
    target_size=(img_height, img_width),
    seed=seed,
    follow_links=True
)

print("number of classes: {}".format(train_generator.nb_class))

print("number of images for training: {}".format(train_generator.nb_sample))
for i in train_generator:
    x, y = i
    print("shape of train x: {}, y: {}".format(x.shape, y.shape))
    break

# note: the generator loops indefinitely over the 151 validation images, yielding batches of batch_size (32)
print("number of images for validation: {}".format(val_generator.nb_sample))


Found 194 images belonging to 3 classes.
Found 151 images belonging to 3 classes.
number of classes: 3
number of images for training: 194
shape of train x: (32, 224, 224, 3), y: (32, 3)
number of images for validation: 151
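
preprocess_input is imported above but never applied, while the ImageNet-pretrained ResNet50 expects mean-subtracted, channel-reordered inputs. A minimal sketch of a wrapper generator (the preprocessed_flow name is illustrative, not part of the pipeline that produced the outputs below):

In [ ]:
def preprocessed_flow(generator):
    # apply ImageNet-style preprocessing (mean subtraction, RGB -> BGR) to each batch
    for x_batch, y_batch in generator:
        yield preprocess_input(x_batch.astype("float32")), y_batch

# usage sketch: feed the wrapped generator wherever train_generator / val_generator is used
# train_generator_pp = preprocessed_flow(train_generator)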

In [70]:
model = ResNet50(
            include_top=False,
            weights="imagenet",
            input_tensor=Input(
                shape=(
                    img_height, 
                    img_width, 
                    img_dimension
                )
            )
        )

In [71]:
for layer in model.layers:
    layer.trainable = False

In [72]:
def __extract_features(generator, model, batch_size, set_type):
    feature_dirpath = "./features/"
    print("create a feature directory to store saved features: {}".format(feature_dirpath))
    if not os.path.exists(feature_dirpath):
        os.makedirs(feature_dirpath)

    print("extract features from convolutional model based on data")
    print("generator: {}_generator".format(set_type))
    print("batch_size: {}".format(batch_size))
    # NOTE: in Keras 1, the second positional argument of predict_generator is val_samples
    # (the number of samples to draw), not the batch size, so this extracts only
    # `batch_size` feature vectors; pass generator.nb_sample to cover the whole set
    features = model.predict_generator(
        generator,
        batch_size
    )

    time_now = datetime.datetime.now().strftime("%Y%m%d_%H_%M_%S")
    features_filepath = feature_dirpath + "REALDEAL_{}_{}_{}_features_{}.npy".format(
        dataset_type,
        conv_model_type,
        set_type,
        time_now
    )
    print("save features to {}".format(features_filepath))
    np.save(open(features_filepath, "wb"), features)

    return features, features_filepath
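
predict_generator above is given batch_size as its sample count, so only 32 feature vectors come out per call. A sketch of extracting features for the whole training set instead, using an unshuffled pass so the i-th feature lines up with the i-th entry of generator.classes (the ordered_generator name is illustrative):

In [ ]:
# sketch: extract features for every training image, in directory order
ordered_generator = train_datagen.flow_from_directory(
    directory=train_dir_path,
    target_size=(img_height, img_width),
    shuffle=False,          # keep directory order so labels built from .classes stay aligned
    follow_links=True
)
all_train_features = model.predict_generator(ordered_generator, ordered_generator.nb_sample)
print(all_train_features.shape)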

In [73]:
train_features, train_features_path = __extract_features(train_generator, model, batch_size, "train")


create a feature directory to store saved features: ./features/
extract features from convolutional model based on data
generator: train_generator
batch_size: 32
save features to ./features/REALDEAL_DGCarsDataset_ResNet50_train_features_20170216_19_59_28.npy

In [11]:
#save features to ./features/REALDEAL_DGCarsDataset_ResNet50_train_features_20170214_23_34_31.npy

In [74]:
train_feature_path = "./features/REALDEAL_DGCarsDataset_ResNet50_train_features_20170214_23_34_31.npy"
train_features = np.load(open(train_feature_path, "rb"))

2. Train classifier based on features


In [115]:
checkpoint_dirpath = "./checkpoints/"
print("create a checkpoint directory to store saved checkpoints: {}".format(checkpoint_dirpath))
if not os.path.exists(checkpoint_dirpath):
    os.makedirs(checkpoint_dirpath)

checkpoint_filepath = \
    checkpoint_dirpath + \
    "{}_{}_features_".format(dataset_type, "classifier") + \
    "{epoch:02d}_{val_acc:.8f}.h5"  # ModelCheckpoint writes model weights (HDF5), not a .npy array

# NOTE: monitoring "val_acc" requires validation data to be passed to fit/fit_generator,
# otherwise the callback raises KeyError: 'val_acc' (see the traceback further below)
checkpoint = ModelCheckpoint(
    checkpoint_filepath,
    monitor="val_acc",
    save_best_only=True,
    mode="max"
)
callbacks_list = [checkpoint]


create a checkpoint directory to store saved checkpoints: ./checkpoints/
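
If no validation split is wired into training, the checkpoint cannot monitor "val_acc". A small sketch of a fallback that checkpoints on training accuracy instead (the filename pattern is illustrative):

In [ ]:
# sketch: checkpoint on training accuracy when no validation data is provided
checkpoint_train_acc = ModelCheckpoint(
    checkpoint_dirpath + "{}_classifier_{{epoch:02d}}_{{acc:.4f}}.h5".format(dataset_type),
    monitor="acc",
    save_best_only=True,
    mode="max"
)
# callbacks_list = [checkpoint_train_acc]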

In [76]:
nb_classes = train_generator.nb_class
nb_features = model.output_shape[-1] # same as train_features.shape[-1]
nb_hidden_units = int(round(np.mean([nb_features, nb_classes])))  # size of the single hidden layer
print("{} -> [hidden layer {}] -> {}\n".format(nb_features, nb_hidden_units, nb_classes))

top_model = Sequential()
top_model.add(Dense(nb_hidden_units, activation="relu", input_shape=train_features.shape[1:]))
top_model.add(Flatten())
top_model.add(Dense(nb_classes, activation="softmax"))


2048 -> [hidden layer 1026] -> 3
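
The bottleneck features have shape (1, 1, 2048), so the Dense layer above is applied before flattening. A more conventional ordering is to flatten first and regularize with the dropout_rate defined earlier; this is a sketch of an alternative, not the classifier that produced the results below:

In [ ]:
# sketch: flatten the (1, 1, 2048) bottleneck first, then classify, with dropout in between
top_model_alt = Sequential()
top_model_alt.add(Flatten(input_shape=train_features.shape[1:]))
top_model_alt.add(Dense(nb_hidden_units, activation="relu"))
top_model_alt.add(Dropout(dropout_rate))
top_model_alt.add(Dense(nb_classes, activation="softmax"))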


In [77]:
top_model.compile(
    loss="categorical_crossentropy",
    optimizer="adam",
    metrics=["accuracy"]
)

In [78]:
count = 0
for i in train_generator:
    x, y = i
    count = count + 1
    print("count: {}, x.shape: {}, y.shape: {}".format(count, x.shape, y.shape))
    print("y: {}".format(y))
    break


count: 1, x.shape: (32, 224, 224, 3), y.shape: (32, 3)
y: [[ 1.  0.  0.]
 [ 0.  0.  1.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 0.  1.  0.]
 [ 0.  0.  1.]
 [ 0.  1.  0.]
 [ 1.  0.  0.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 1.  0.  0.]
 [ 0.  1.  0.]
 [ 1.  0.  0.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 1.  0.  0.]
 [ 0.  0.  1.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 0.  1.  0.]
 [ 0.  0.  1.]
 [ 0.  0.  1.]
 [ 0.  0.  1.]
 [ 0.  0.  1.]
 [ 0.  1.  0.]
 [ 0.  1.  0.]
 [ 0.  1.  0.]
 [ 0.  0.  1.]
 [ 1.  0.  0.]
 [ 0.  0.  1.]]

In [79]:
count = 0
for x in train_features:
    count = count + 1
    print("count: {}, x.shape: {}".format(count, x.shape))


count: 1, x.shape: (1, 1, 2048)
count: 2, x.shape: (1, 1, 2048)
...
count: 194, x.shape: (1, 1, 2048)

In [ ]:
# x.shape: (1, 224, 224, 3), y.shape: (1, 3)
# batch_size = 1

In [59]:
class_dictionary = train_generator.class_indices

for key, value in class_dictionary.items():
    print(key, value)


3 2
2 1
1 0
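
class_indices maps directory names to label indices. Inverting it is handy for reporting predictions by class name; a short sketch (index_to_class is an illustrative name):

In [ ]:
# sketch: invert the name -> index mapping so predicted indices can be read back as class names
index_to_class = {index: name for name, index in train_generator.class_indices.items()}
print(index_to_class)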

In [57]:
count = 0
for i in train_generator.classes:
    count = count + 1
    print("count: {}, i: {}".format(count, i))


count: 1, i: 0
count: 2, i: 0
...
count: 63, i: 0
count: 64, i: 1
...
count: 119, i: 1
count: 120, i: 2
...
count: 194, i: 2

In [66]:
import threading
class threadsafe_iter:
    """Takes an iterator/generator and makes it thread-safe by
    serializing calls to the `next` method of the given iterator/generator.
    """
    def __init__(self, it):
        self.it = it
        self.lock = threading.Lock()

    def __iter__(self):
        return self

    def __next__(self):  # Python 3 iterator protocol
        with self.lock:
            return next(self.it)

    next = __next__  # keep the Python 2-style name as an alias


def threadsafe_generator(f):
    """A decorator that takes a generator function and makes it thread-safe.
    """
    def g(*a, **kw):
        return threadsafe_iter(f(*a, **kw))
    return g

@threadsafe_generator
def __create_generator_from_features(features, generator):  # yields one (feature, one-hot label) pair per sample
    for feature, class_index in zip(features, generator.classes):
        label = np.zeros(generator.nb_class)
        label[class_index] = 1
        yield (feature, label)
    
    """
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    y_train = np_utils.to_categorical(y_train,10)
    X_train = X_train.reshape(X_train.shape[0], 1, 28, 28)
    X_test = X_test.reshape(X_test.shape[0], 1, 28, 28)
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    X_train /= 255
    X_test /= 255
    while 1:
        for i in range(1875):
            yield X_train[i*32:(i+1)*32], y_train[i*32:(i+1)*32]
        # print("Came here")
    """

#@threadsafe_generator
from keras.datasets import mnist
from keras.utils import np_utils
def mygenerator():  # write the definition of your data generator
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    y_train = np_utils.to_categorical(y_train,10)
    print("y_train.shape: {}".format(y_train.shape))
    X_train = X_train.reshape(X_train.shape[0], 1, 28, 28)
    print("X_train.shape: {}".format(X_train.shape))
    X_test = X_test.reshape(X_test.shape[0], 1, 28, 28)
    X_train = X_train.astype('float32')
    print("X_train.shape: {}".format(X_train.shape))
    X_test = X_test.astype('float32')
    X_train /= 255
    print("X_train.shape: {}".format(X_train.shape))
    X_test /= 255
    while 1:
        for i in range(1875):
            print(i)
            yield X_train[i*32:(i+1)*32], y_train[i*32:(i+1)*32]

count = 0
for i in mygenerator():
    x, y = i
    print("count: {}, x.shape: {}, y.shape: {}".format(count, x.shape, y.shape))
    #print(">> x: {}".format(x))
    #print(">> y: {}".format(y)'break)
    break

        
#train_feature_generator = __create_generator_from_features(train_features, train_generator)

"""
count = 0
for i in train_feature_generator:
    x, y = i
    count = count + 1
    print("count: {}, x.shape: {}, y.shape: {}".format(count, x.shape, y.shape))
    print("type of y: {}".format(type(y)))
"""


y_train.shape: (60000, 10)
X_train.shape: (60000, 1, 28, 28)
X_train.shape: (60000, 1, 28, 28)
X_train.shape: (60000, 1, 28, 28)
0
count: 0, x.shape: (32, 1, 28, 28), y.shape: (32, 10)
Out[66]:
'\ncount = 0\nfor i in train_feature_generator:\n    x, y = i\n    count = count + 1\n    print("count: {}, x.shape: {}, y.shape: {}".format(count, x.shape, y.shape))\n    print("type of y: {}".format(type(y)))\n'

In [87]:
# build one-hot labels in the order of train_generator.classes (directory order);
# note this only lines up with train_features if the features were extracted without shuffling
labels = np.zeros((194, 3))
count = 0
for i, class_index in zip(range(0, 194), train_generator.classes):
    labels[i][class_index] = 1
    count = count + 1
    #print("count: {}, i: {}, class_index: {}, labels[{}]: {}".format(count, i, class_index, i, labels[i]))


count: 1, i: 0, class_index: 0, labels[0]: [ 1.  0.  0.]
...
count: 63, i: 62, class_index: 0, labels[62]: [ 1.  0.  0.]
count: 64, i: 63, class_index: 1, labels[63]: [ 0.  1.  0.]
...
count: 119, i: 118, class_index: 1, labels[118]: [ 0.  1.  0.]
count: 120, i: 119, class_index: 2, labels[119]: [ 0.  0.  1.]
...
count: 194, i: 193, class_index: 2, labels[193]: [ 0.  0.  1.]

In [97]:
print(train_features[0])


[[[ 0.05949762  0.11966778  0.22653553 ...,  0.10000877  0.01944498
    0.15126897]]]

In [96]:
# 194 = train_generator.nb_sample
# 32 = batch_size


def get_train_feature_generator():
    # yields (x, y) batches of 32 precomputed features and one-hot labels
    # x has the form (32, 1, 1, 2048); y has the form (32, 3)
    # int(194/32) = 6 full batches, so the last 2 samples are never yielded
    while True:
        for i in range(int(194/32)):
            yield train_features[i*32:(i+1)*32], labels[i*32:(i+1)*32]
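
A sketch of the same idea without the hard-coded sizes, yielding the trailing partial batch as well so no samples are dropped (assumes train_features and labels are aligned index-for-index):

In [ ]:
# sketch: parameterized batch generator over precomputed features and one-hot labels
def feature_batch_generator(features, labels, batch_size=32):
    nb = len(features)
    while True:
        for start in range(0, nb, batch_size):
            end = min(start + batch_size, nb)
            yield features[start:end], labels[start:end]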

In [117]:
train_features_generator = get_train_feature_generator()

top_model.fit_generator(
    generator=train_features_generator,
    samples_per_epoch=32,
    nb_epoch=10, 
    #callbacks=callbacks_list,
    verbose=2
)


Epoch 1/10
0s - loss: 1.1921e-07 - acc: 1.0000
Epoch 2/10
0s - loss: 0.5037 - acc: 0.9688
Epoch 3/10
0s - loss: 16.1181 - acc: 0.0000e+00
Epoch 4/10
0s - loss: 16.1181 - acc: 0.0000e+00
Epoch 5/10
0s - loss: 16.1181 - acc: 0.0000e+00
Epoch 6/10
0s - loss: 16.1181 - acc: 0.0000e+00
Epoch 7/10
0s - loss: 1.1921e-07 - acc: 1.0000
Epoch 8/10
0s - loss: 0.5037 - acc: 0.9688
Epoch 9/10
0s - loss: 16.1181 - acc: 0.0000e+00
Epoch 10/10
0s - loss: 16.1181 - acc: 0.0000e+00
Out[117]:
<keras.callbacks.History at 0x7f3bc82376d8>

In [116]:
top_model.fit_generator(
    generator=train_features_generator,
    samples_per_epoch=32,
    nb_epoch=10, 
    callbacks=callbacks_list,
    verbose=2
)


Epoch 1/10
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-116-7914d73f66c3> in <module>()
      4     nb_epoch=10,
      5     callbacks=callbacks_list,
----> 6     verbose=2
      7 )

/opt/conda/lib/python3.5/site-packages/keras/models.py in fit_generator(self, generator, samples_per_epoch, nb_epoch, verbose, callbacks, validation_data, nb_val_samples, class_weight, max_q_size, nb_worker, pickle_safe, **kwargs)
    922                                         max_q_size=max_q_size,
    923                                         nb_worker=nb_worker,
--> 924                                         pickle_safe=pickle_safe)
    925 
    926     def evaluate_generator(self, generator, val_samples,

/opt/conda/lib/python3.5/site-packages/keras/engine/training.py in fit_generator(self, generator, samples_per_epoch, nb_epoch, verbose, callbacks, validation_data, nb_val_samples, class_weight, max_q_size, nb_worker, pickle_safe, initial_epoch)
   1552                         epoch_logs['val_' + l] = o
   1553 
-> 1554             callbacks.on_epoch_end(epoch, epoch_logs)
   1555             epoch += 1
   1556             if callback_model.stop_training:

/opt/conda/lib/python3.5/site-packages/keras/callbacks.py in on_epoch_end(self, epoch, logs)
     41     def on_epoch_end(self, epoch, logs={}):
     42         for callback in self.callbacks:
---> 43             callback.on_epoch_end(epoch, logs)
     44 
     45     def on_batch_begin(self, batch, logs={}):

/opt/conda/lib/python3.5/site-packages/keras/callbacks.py in on_epoch_end(self, epoch, logs)
    286         if self.epochs_since_last_save >= self.period:
    287             self.epochs_since_last_save = 0
--> 288             filepath = self.filepath.format(epoch=epoch, **logs)
    289             if self.save_best_only:
    290                 current = logs.get(self.monitor)

KeyError: 'val_acc'
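
The checkpoint monitors "val_acc", but no validation data is passed to fit_generator, so that key never appears in the epoch logs and ModelCheckpoint fails when formatting the filename. A sketch of the same call with validation wired in; val_features_all and val_labels_all are illustrative names (one way to build them is sketched after the evaluation cell below):

In [ ]:
# sketch: provide validation data so ModelCheckpoint can read val_acc from the epoch logs
top_model.fit_generator(
    generator=train_features_generator,
    samples_per_epoch=192,
    nb_epoch=nb_epoch,
    validation_data=(val_features_all, val_labels_all),
    callbacks=callbacks_list,
    verbose=2
)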

In [118]:
val_features, val_features_path = __extract_features(val_generator, model, batch_size, "validation")

# only 32 validation features were extracted above (predict_generator was given batch_size
# as its sample count), so one-hot labels are built for just those 32; note also that
# val_generator shuffles, so this label order may not match the extracted features
val_labels = np.zeros((32, 3))

for i, class_index in zip(range(0, 32), val_generator.classes):
    val_labels[i][class_index] = 1

print("val_features: {}, val_labels: {}".format(val_features.shape, val_labels.shape))

score = top_model.evaluate(
    x=val_features,
    y=val_labels,
    batch_size=batch_size,
)

print("{}: {}".format(
    top_model.metrics_names[1],
    score[1]
))


create a feature directory to store saved features: ./features/
extract features from convolutional model based on data
generator: validation_generator
batch_size: 32
save features to ./features/REALDEAL_DGCarsDataset_ResNet50_validation_features_20170216_23_09_44.npy
val_features: (32, 1, 1, 2048), val_labels: (32, 3)
32/32 [==============================] - 0s
acc: 1.0
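
The accuracy above is computed on only 32 of the 151 validation images, with labels built in directory order while the extraction batch was shuffled. A sketch of evaluating the classifier on the full validation set with an unshuffled pass so features and labels stay aligned (variable names are illustrative):

In [ ]:
# sketch: extract features for all 151 validation images in directory order, then evaluate
ordered_val_generator = val_datagen.flow_from_directory(
    directory=val_dir_path,
    target_size=(img_height, img_width),
    shuffle=False,
    follow_links=True
)
val_features_all = model.predict_generator(ordered_val_generator, ordered_val_generator.nb_sample)

val_labels_all = np.zeros((ordered_val_generator.nb_sample, ordered_val_generator.nb_class))
for i, class_index in enumerate(ordered_val_generator.classes):
    val_labels_all[i][class_index] = 1

score = top_model.evaluate(x=val_features_all, y=val_labels_all, batch_size=batch_size)
print("{}: {}".format(top_model.metrics_names[1], score[1]))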

In [103]:
print(val_features.shape)


(32, 1, 1, 2048)

In [107]:
val_labels


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-107-df93e4c7b643> in <module>()
----> 1 val_labels

NameError: name 'val_labels' is not defined

3. Train entire model with data


In [119]:
from keras.models import Model                              

model = ResNet50(
            include_top=False,
            weights="imagenet",
            input_tensor=Input(
                shape=(
                    img_height, 
                    img_width, 
                    img_dimension
                )
            )
        )

for layer in model.layers:
    layer.trainable = True

In [120]:
combined_model = Model(input=model.input, output=top_model(model.output))

In [121]:
combined_model.compile(
    loss="categorical_crossentropy",
    optimizer="adam",
    metrics=["accuracy"]
)

In [123]:
# as above, samples_per_epoch=32 trains on a single batch per epoch;
# train_generator.nb_sample (194) would cover the whole training set
combined_model.fit_generator(
    generator=train_generator,
    samples_per_epoch=32,
    nb_epoch=10,
    #callbacks=callbacks_list,
    verbose=2
)


Epoch 1/10
/opt/conda/lib/python3.5/site-packages/keras/engine/training.py:1527: UserWarning: Epoch comprised more than `samples_per_epoch` samples, which might affect learning results. Set `samples_per_epoch` correctly to avoid this warning.
  warnings.warn('Epoch comprised more than '
10s - loss: 9.4812 - acc: 0.4118
Epoch 2/10
1s - loss: 9.0664 - acc: 0.4375
Epoch 3/10
0s - loss: 12.5923 - acc: 0.2188
Epoch 4/10
0s - loss: 12.0886 - acc: 0.2500
Epoch 5/10
0s - loss: 9.5701 - acc: 0.4062
Epoch 6/10
0s - loss: 13.5996 - acc: 0.1562
Epoch 7/10
0s - loss: 11.3775 - acc: 0.2941
Epoch 8/10
0s - loss: 10.5775 - acc: 0.3438
Epoch 9/10
0s - loss: 11.5849 - acc: 0.2812
Epoch 10/10
0s - loss: 9.5701 - acc: 0.4062
Out[123]:
<keras.callbacks.History at 0x7f39cb10aeb8>

In [125]:
score = combined_model.evaluate_generator(
    generator=val_generator,
    val_samples=val_generator.nb_sample
)

print("{}: {}".format(
    combined_model.metrics_names[1],
    score[1]
))


acc: 0.3112582793298936
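
Unfreezing all of ResNet50 and fine-tuning with the default Adam learning rate on ~194 images tends to wreck the pretrained weights, which is consistent with the near-chance validation accuracy above. A sketch of a more conservative setup (freeze most of the base, lower the learning rate, train over the full set, validate); the layer count and learning rate are illustrative, not tuned:

In [ ]:
from keras.optimizers import Adam

# sketch: freeze all but the last ~20 layers of the ResNet50 base, then fine-tune gently
for layer in model.layers[:-20]:
    layer.trainable = False
for layer in model.layers[-20:]:
    layer.trainable = True

combined_model.compile(
    loss="categorical_crossentropy",
    optimizer=Adam(lr=1e-5),  # much smaller than the default 1e-3
    metrics=["accuracy"]
)

combined_model.fit_generator(
    generator=train_generator,
    samples_per_epoch=train_generator.nb_sample,
    nb_epoch=nb_epoch,
    validation_data=val_generator,
    nb_val_samples=val_generator.nb_sample,
    verbose=2
)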

In [ ]: